Hcaptcha Solver with Browser Trainer (Automatically solves Hcaptcha in browser)

Hcaptcha Solver in Browser | Automatically solves Hcaptcha in browser

This is the version submitted on 2024-06-19. View the latest version.

  1. // ==UserScript==
  2. // @name Hcaptcha Solver with Browser Trainer (Automatically solves Hcaptcha in browser)
  3. // @namespace Hcaptcha Solver
  4. // @version 10.0
  5. // @description Hcaptcha Solver in Browser | Automatically solves Hcaptcha in browser
  6. // @match https://*.hcaptcha.com/*hcaptcha-challenge*
  7. // @match https://*.hcaptcha.com/*checkbox*
  8. // @grant GM_xmlhttpRequest
  9. // @grant GM_setValue
  10. // @grant GM_getValue
  11. // @run-at document-start
  12. // @connect www.imageidentify.com
  13. // @connect https://cdnjs.cloudflare.com
  14. // @connect https://cdn.jsdelivr.net
  15. // @connect https://unpkg.com
  16. // @connect https://*.hcaptcha.com/*
  17. // @require https://unpkg.com/jimp@0.5.2/browser/lib/jimp.min.js
  18. // @require https://cdnjs.cloudflare.com/ajax/libs/tesseract.js/2.0.0-alpha.2/tesseract.min.js
  19. // @require https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@3.13.0/dist/tf.min.js
  20. // @require https://cdn.jsdelivr.net/npm/@tensorflow-models/coco-ssd@2.2.2/dist/coco-ssd.min.js
  21. // @require https://cdn.jsdelivr.net/npm/@tensorflow-models/mobilenet@2.1.0/dist/mobilenet.min.js
  22.  
  23. /*
  24. ██╗░░██╗░█████╗░░█████╗░██████╗░████████╗░█████╗░██╗░░██╗░█████╗░  ░██████╗░█████╗░██╗░░░░░██╗░░░██╗███████╗██████╗░
  25. ██║░░██║██╔══██╗██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██║░░██║██╔══██╗  ██╔════╝██╔══██╗██║░░░░░██║░░░██║██╔════╝██╔══██╗
  26. ███████║██║░░╚═╝███████║██████╔╝░░░██║░░░██║░░╚═╝███████║███████║  ╚█████╗░██║░░██║██║░░░░░╚██╗░██╔╝█████╗░░██████╔╝
  27. ██╔══██║██║░░██╗██╔══██║██╔═══╝░░░░██║░░░██║░░██╗██╔══██║██╔══██║  ░╚═══██╗██║░░██║██║░░░░░░╚████╔╝░██╔══╝░░██╔══██╗
  28. ██║░░██║╚█████╔╝██║░░██║██║░░░░░░░░██║░░░╚█████╔╝██║░░██║██║░░██║  ██████╔╝╚█████╔╝███████╗░░╚██╔╝░░███████╗██║░░██║
  29. ╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝╚═╝░░░░░░░░╚═╝░░░░╚════╝░╚═╝░░╚═╝╚═╝░░╚═╝  ╚═════╝░░╚════╝░╚══════╝░░░╚═╝░░░╚══════╝╚═╝░░╚═╝
  30. */
  31. /** Note: This script is intended solely for educational purposes and not to abuse any website.
  32. */
  33.  
  34. // ==/UserScript==
  35. (async function() {
  36.  
  37. //TODO: Enable debug mode to print console logs
  38. //TODO: Refactor Code for different models
  39. 'use strict';
  40. var selectedImageCount = 0;
  41. var tensorFlowModel = undefined;
  42. var tensorFlowMobileNetModel = undefined;
  43. var worker = undefined;
  44.  
  45. var identifiedObjectsList = [];
  46. var exampleImageList = [];
  47. var identifyObjectsFromImagesCompleted = false;
  48. var currentExampleUrls = [];
  49.  
  50. //Default Language for hcaptcha
  51. const LANG_ENGLISH = "English"
  52. const DEFAULT_LANGUAGE = LANG_ENGLISH;
  53. const ENABLE_DEFAULT_LANGUAGE = true;
  54.  
  55. //Guess/Match New Images
  56. const MATCH_IMAGES_USING_TRAINER = false;
  57. const GUESS_NEW_IMAGE_TYPE = false;
  58.  
  59. //Node Selectors
  60. const CHECK_BOX = "#checkbox";
  61. const SUBMIT_BUTTON = ".button-submit";
  62. const TASK_IMAGE_BORDER = ".task-image .border";
  63. const IMAGE = ".task-image .image";
  64. const TASK_IMAGE = ".task-image";
  65. const PROMPT_TEXT = ".prompt-text";
  66. const NO_SELECTION = ".no-selection";
  67. const CHALLENGE_INPUT_FIELD = ".challenge-input .input-field";
  68. const CHALLENGE_INPUT = ".challenge-input";
  69. const CHALLENGE_IMAGE = ".challenge-example .image .image";
  70. const IMAGE_FOR_OCR = ".challenge-image .zoom-image";
  71. const LANGUAGE_SELECTOR = "#language-list .scroll-container .option span";
  72.  
  73. //Attributes
  74. const ARIA_CHECKED = "aria-checked";
  75. const ARIA_HIDDEN = "aria-hidden";
  76.  
  77. //Values that can be changed for other languages
  78. const AIRPLANE = "airplane";
  79. const BICYCLE = "bicycle";
  80. const BOAT = "boat";
  81. const BUS = "bus";
  82. const CAR = "car";
  83. const MOTORBUS = "motorbus";
  84. const MOTORCYCLE = "motorcycle";
  85. const SURFBOARD = "surfboard";
  86. const TRAIN = "train";
  87. const TRUCK = "truck";
  88. const TRIMARAN = "trimaran";
  89. const SEAPLANE = "seaplane";
  90. const SPEEDBOAT = "speedboat";
  91.  
  92. //Living Room Objects
  93. const BED = "bed";
  94. const BOOK = "book";
  95. const CHAIR = "chair";
  96. const CLOCK = "clock";
  97. const COUCH = "couch";
  98. const DINING_TABLE = "dining table";
  99. const POTTED_PLANT = "potted plant";
  100. const TV = "tv";
  101.  
  102. //Animals
  103. const ZEBRA = "zebra";
  104. const CAT = "cat";
  105. const DOG = "dog";
  106.  
  107. // Vertical River
  108. const VALLEY = "valley";
  109. const VERTICAL_RIVER = "vertical river";
  110.  
  111.  
  112. const LIVING_ROOM_TYPES = [BED, BOOK, CHAIR, CLOCK, COUCH, DINING_TABLE, POTTED_PLANT, TV];
  113. const TRANSPORT_TYPES = [AIRPLANE, BICYCLE, BOAT, BUS, CAR, MOTORBUS, MOTORCYCLE, SEAPLANE, SPEEDBOAT, SURFBOARD, TRAIN, TRIMARAN, TRUCK];
  114. const ANIMAL_TYPES = [ZEBRA, CAT, DOG];
  115.  
  116. const SENTENCE_TEXT_A = "Please click each image containing a ";
  117. const SENTENCE_TEXT_AN = "Please click each image containing an ";
  118. const LANGUAGE_FOR_OCR = "eng";
  119.  
  120. // Option to override the default image matching
  121. // Enabled by default
  122. const ENABLE_TENSORFLOW = true;
  123.  
  124. // Max Skips that can be done while solving the captcha
  125. // This is unlikely to happen; if it does, retry with new images
  126. const MAX_SKIPS = 10;
  127. var skipCount = 0;
  128.  
  129. var USE_MOBILE_NET = false;
  130. var USE_COLOUR_PATTERN = false;
  131. var NEW_WORD_IDENTIFIED = false;
  132.  
  133. //Probability thresholds for objects
  134. var probabilityForObject = new Map();
  135. probabilityForObject.set("speedboat", 0.14);
  136. probabilityForObject.set("fireboat", 0.4);
  137. probabilityForObject.set("boathouse", 0.4);
  138. probabilityForObject.set("submarine", 0.5);
  139. probabilityForObject.set("printer", 0.05);
  140. probabilityForObject.set("stretcher", 0.05);
  141. probabilityForObject.set("rotisserie", 0.02);
  142. probabilityForObject.set("spatula", 0.05);
  143.  
  144.  
  145. String.prototype.includesOneOf = function(arrayOfStrings) {
  146.  
  147. //If this is not an Array, compare it as a String
  148. if (!Array.isArray(arrayOfStrings)) {
  149. return this.toLowerCase().includes(arrayOfStrings.toLowerCase());
  150. }
  151.  
  152. for (var i = 0; i < arrayOfStrings.length; i++) {
  153. if ((arrayOfStrings[i].substr(0, 1) == "=" && this.toLowerCase() == arrayOfStrings[i].substr(1).toLowerCase()) ||
  154. (this.toLowerCase().includes(arrayOfStrings[i].toLowerCase()))) {
  155. return true;
  156. }
  157. }
  158. return false;
  159. }
  160.  
  161. String.prototype.equalsOneOf = function(arrayOfStrings) {
  162.  
  163. //If this is not an Array, compare it as a String
  164. if (!Array.isArray(arrayOfStrings)) {
  165. return this.toLowerCase() == arrayOfStrings.toLowerCase();
  166. }
  167.  
  168. for (var i = 0; i < arrayOfStrings.length; i++) {
  169. if ((arrayOfStrings[i].substr(0, 1) == "=" && this.toLowerCase() == arrayOfStrings[i].substr(1).toLowerCase()) ||
  170. (this.toLowerCase() == arrayOfStrings[i].toLowerCase())) {
  171. return true;
  172. }
  173. }
  174. return false;
  175. }
  176.  
  177.  
  178.  
  179. // This script uses the imageidentify API (Wolfram). You may also use TensorFlow.js or the latest YOLO version to recognize common objects.
  180. // (When a cloud service becomes available for YOLO, we can switch the API endpoint.) Accuracy varies between Wolfram, TensorFlow and YOLO.
  181. // Use this as a reference for solving reCAPTCHA/other captchas with in-browser scripts. This is intended for learning purposes.
  182. // TensorFlow is used as the fallback, but it needs a good CPU to solve quickly.
  183. // CPU and memory utilization may spike when using TensorFlow.js
  184. function matchImages(imageUrl, word, i) {
  185.  
  186. GM_xmlhttpRequest({
  187. method: "POST",
  188. url: "https://www.imageidentify.com/objects/user-26a7681f-4b48-4f71-8f9f-93030898d70d/prd/urlapi/",
  189. headers: {
  190. "Content-Type": "application/x-www-form-urlencoded"
  191. },
  192. data: "image=" + encodeURIComponent(imageUrl),
  193. timeout: 8000,
  194. onload: function(response) {
  195. clickImages(response, imageUrl, word, i)
  196. },
  197. onerror: function(e) {
  198. //Using Fallback TensorFlow
  199. if (e && e.status && e.status != 0) {
  200. console.log(e);
  201. console.log("Using Fallback");
  202. }
  203. matchImagesUsingTensorFlow(imageUrl, word, i);
  204.  
  205. },
  206. ontimeout: function() {
  207. //console.log("Timed out. Using Fallback");
  208. matchImagesUsingTensorFlow(imageUrl, word, i);
  209. },
  210. });
  211.  
  212. }
  213.  
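// Fallback matcher: runs coco-ssd object detection on the tile image and clicks the
// still-unselected tile (border opacity 0) when a predicted class matches the target word.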
  214. function matchImagesUsingTensorFlow(imageUrl, word, i) {
  215. try {
  216. let img = new Image();
  217. img.crossOrigin = "Anonymous";
  218. img.src = imageUrl;
  219. img.onload = () => {
  220. initializeTensorFlowModel().then(model => model.detect(img))
  221. .then(function(predictions) {
  222. var predictionslen = predictions.length;
  223. for (var j = 0; j < predictionslen; j++) {
  224. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  225. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0 &&
  226. predictions[j].class.includesOneOf(word)) {
  227. qSelectorAll(TASK_IMAGE)[i].click();
  228. break;
  229. }
  230. }
  231. img.removeAttribute("src");
  232. selectedImageCount = selectedImageCount + 1;
  233. });
  234. }
  235. } catch (err) {
  236. console.log(err.message);
  237. }
  238. }
  239.  
  240.  
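// MobileNet-based matcher: classifies the tile image and clicks the unselected tile when a
// predicted class name matches the target word and its probability exceeds the per-object
// threshold from probabilityForObject (default 0.077).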
  241. function matchImagesUsingTensorFlowMobileNet(imageUrl, word, i) {
  242.  
  243. try {
  244. let img = new Image();
  245. img.crossOrigin = "Anonymous";
  246. img.src = imageUrl;
  247. img.onload = () => {
  248. initializeTensorFlowMobilenetModel().then(model => model.classify(img))
  249. .then(function(predictions) {
  250. var predictionslen = predictions.length;
  251. for (var j = 0; j < predictionslen; j++) {
  252. var probability = 0.077;
  253. if (probabilityForObject.get(predictions[j].className)) {
  254. probability = probabilityForObject.get(predictions[j].className);
  255. }
  256.  
  257. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  258. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0 &&
  259. predictions[j].className.includesOneOf(word) && predictions[j].probability > probability) {
  260. qSelectorAll(TASK_IMAGE)[i].click();
  261. break;
  262. }
  263. }
  264. img.removeAttribute("src");
  265. selectedImageCount = selectedImageCount + 1;
  266. });
  267. }
  268. } catch (err) {
  269. console.log(err.message);
  270. }
  271. }
  272.  
  273.  
  274. // TODO: Generalize this logic
  275. // This heuristic is based on observation of the images seen so far.
  276. // The proper approach would be to scan the entire image to find the lake.
  277. // The MobileNet model in browser JS identifies the lake but does not provide coordinates,
  278. // so it cannot tell whether it is horizontal or vertical.
  279. function matchImageForVerticalRiver(imageUrl, word, i) {
  280.  
  281. Jimp.read(imageUrl).then(function (data) {
  282.  
  283. data.getBase64(Jimp.AUTO, async function (err, src) {
  284. var img = document.createElement("img");
  285. img.setAttribute("src", src);
  286. await img.decode();
  287. var imageHeight = img.height;
  288. var imageWidth = img.width;
  289. var cropHeight = imageHeight - 0.03*imageHeight;
  290. let url = src.replace(/^data:image\/\w+;base64,/, "");
  291. let buffer = Buffer.from(url, 'base64');
  292.  
  293. Jimp.read(buffer).then(function (data) {
  294. data.crop(0, cropHeight, imageWidth, imageHeight)
  295. .getBase64(Jimp.AUTO, async function (err, src) {
  296.  
  297. var img = document.createElement("img");
  298. img.src = src;
  299. await img.decode();
  300.  
  301. var c = document.createElement("canvas")
  302. c.width = img.width;
  303. c.height = img.height;
  304. var ctx = c.getContext("2d");
  305. ctx.drawImage(img, 0, 0);
  306.  
  307. var imageData = ctx.getImageData(0, 0, c.width, c.height);
  308. var data = imageData.data;
  309. var count = 0;
  310.  
  311. //Multiple combinations and distances are required for accuracy
  312. for (let i = 0; i < data.length; i+= 4) {
  313. if( (data[i] < 140 && data[i+1] < 110 && data[i+2] > 80 && data[i+3] == 255) ||
  314. (data[i] < 200 && data[i+1] < 200 && data[i+2] > 140 && data[i+3] == 255)){
  315. count++;
  316. }
  317. }
  318.  
  319. if(count > 0.001*(data.length/4) && count < data.length/8) {
  320. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  321. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0) {
  322. qSelectorAll(TASK_IMAGE)[i].click();
  323. }
  324. }
  325.  
  326. img.removeAttribute("src");
  327. selectedImageCount = selectedImageCount + 1;
  328.  
  329. });
  330. });
  331. img.removeAttribute("src");
  332. });
  333. });
  334. }
  335.  
  336.  
  337. // This is a naive approach that stores the selected images and retrieves them later
  338. // The accuracy is 100% as long as the selected images are stored
  339. // Browser storage is used to hold the images and is cleared if you delete the browser cache and cookies
  340. // You may instead store the images in a remote location and retrieve them for quick access
  341. // This approach is only used in urgent scenarios before the images have been trained
  342. // Image differencing against the stored images can also identify a new image when it is nearly equal to an existing one
  343. function matchImagesUsingTrainer(imageUrl, word, i) {
  344.  
  345. Jimp.read(imageUrl).then(function (data) {
  346.  
  347. data.getBase64(Jimp.AUTO, async function (err, src) {
  348. var trainerInterval = setInterval(function(){
  349.  
  350. if (!qSelectorAll(IMAGE)[i] || !(qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) ){
  351. clearInterval(trainerInterval);
  352. return;
  353. }
  354.  
  355. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  356. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0 && GM_getValue(src) && GM_getValue(src) == word) {
  357. console.log("Retrieved image from trainer");
  358. selectedImageCount = selectedImageCount + 1;
  359. qSelectorAll(TASK_IMAGE)[i].click();
  360. clearInterval(trainerInterval);
  361. return;
  362. }
  363.  
  364. // Overriding Previously Stored values
  365. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  366. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 1 && GM_getValue(src) && GM_getValue(src) != word) {
  367. console.log("Overriding image in the trainer");
  368. selectedImageCount = selectedImageCount + 1;
  369. GM_setValue(src,word);
  370. console.log("Image Stored into database");
  371. clearInterval(trainerInterval);
  372. return;
  373. }
  374.  
  375. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  376. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 1 && !GM_getValue(src)) {
  377. selectedImageCount = selectedImageCount + 1;
  378. GM_setValue(src,word);
  379. console.log("Image Stored into database");
  380. clearInterval(trainerInterval);
  381. return;
  382.  
  383. }
  384.  
  385. },5000);
  386.  
  387. });
  388. });
  389. }
  390.  
  391.  
  392. //Function to sleep or delay
  393. async function delay(ms) {
  394. return new Promise(resolve => setTimeout(resolve, ms))
  395. }
  396.  
  397. //Different models can be set later based on the use case
  398. //Ref Models: https://github.com/tensorflow/tfjs-models
  399. async function initializeTensorFlowModel() {
  400. if (!tensorFlowModel) {
  401. tensorFlowModel = await cocoSsd.load();
  402. }
  403. return tensorFlowModel;
  404. }
  405.  
  406. //MobileNet classification model
  407. async function initializeTensorFlowMobilenetModel() {
  408. if (!tensorFlowMobileNetModel) {
  409. tensorFlowMobileNetModel = await mobilenet.load();
  410. }
  411. return tensorFlowMobileNetModel;
  412. }
  413.  
  414.  
  415. //Initialize TesseractWorker
  416. function initializeTesseractWorker() {
  417. if (!worker) {
  418. worker = new Tesseract.TesseractWorker();
  419. }
  420. }
  421.  
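// Parses the imageidentify (Wolfram) JSON response and clicks the tile when the title, entity
// or one of the alternatives matches the target word; otherwise falls back to TensorFlow.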
  422. function clickImages(response, imageUrl, word, i) {
  423.  
  424. try {
  425. if (response && response.responseText && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  426. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0) {
  427. var responseJson = JSON.parse(response.responseText);
  428. if (responseJson.identify && responseJson.identify.title && responseJson.identify.title.includesOneOf(word)) {
  429. qSelectorAll(TASK_IMAGE)[i].click();
  430. } else if (responseJson.identify && responseJson.identify.entity && responseJson.identify.entity.includesOneOf(word)) {
  431. qSelectorAll(TASK_IMAGE)[i].click();
  432. } else if (responseJson.identify && responseJson.identify.alternatives) {
  433. var alternatives = JSON.stringify(responseJson.identify.alternatives);
  434. var alternativesJson = JSON.parse(alternatives);
  435.  
  436. for (var key in alternativesJson) {
  437. if (alternativesJson.hasOwnProperty(key)) {
  438. if ((alternativesJson[key].includesOneOf(word) || key.includesOneOf(word))) {
  439. qSelectorAll(TASK_IMAGE)[i].click();
  440. break;
  441. }
  442. }
  443. }
  444. } else {
  445. //No Match found
  446. }
  447.  
  448. selectedImageCount = selectedImageCount + 1;
  449.  
  450. } else {
  451. //console.log("Using Fallback TensorFlow");
  452. matchImagesUsingTensorFlow(imageUrl, word, i);
  453. }
  454.  
  455. } catch (err) {
  456. //Using Fallback TensorFlow
  457. //console.log(err.message);
  458. //console.log("Using Fallback TensorFlow");
  459. matchImagesUsingTensorFlow(imageUrl, word, i);
  460. }
  461. }
  462.  
  463. function qSelectorAll(selector) {
  464. return document.querySelectorAll(selector);
  465. }
  466.  
  467. function qSelector(selector) {
  468. return document.querySelector(selector);
  469. }
  470.  
  471.  
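// Maps the prompt word to the list of labels the models may return for it and selects which
// matcher to use. Entries prefixed with "=" require an exact match (see includesOneOf/equalsOneOf).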
  472. async function getSynonyms(word) {
  473.  
  474. USE_MOBILE_NET = false;
  475. USE_COLOUR_PATTERN = false;
  476. NEW_WORD_IDENTIFIED = false;
  477.  
  478. //TODO: Format this to JSON string
  479. if (word == MOTORBUS || word == BUS) {
  480. word = ['bus', 'motorbus'];
  481. USE_MOBILE_NET = true;
  482. } else if (word == CAR) {
  483. word = ['=car', 'coupe', 'jeep', 'limo', 'sport utility vehicle', 'station wagon', 'hatchback', 'bumper car', 'modelT', 'electric battery', 'cruiser'];
  484. USE_MOBILE_NET = true;
  485. } else if (word == AIRPLANE) {
  486. word = ['airplane', 'plane', 'aircraft', 'aeroplane', 'hangar', 'Airdock', 'JumboJet', 'jetliner', 'stealth fighter', 'field artillery']
  487. USE_MOBILE_NET = true;
  488. } else if (word == TRAIN) {
  489. word = ['train', 'rail', 'cable car', 'locomotive', 'subway station']
  490. USE_MOBILE_NET = true;
  491. } else if (word == BOAT || word == SURFBOARD) {
  492. word = ['=boat', '=barge', 'houseboat', 'boathouse', 'speedboat', '=submarine', 'bobsled', 'catamaran', 'schooner', 'ocean liner', 'lifeboat', 'fireboat', 'yawl', 'pontoon', 'small boat', 'SnowBlower', 'Sea-coast', 'paddlewheel', 'paddle wheel', 'PaddleSteamer', 'Freighter', 'Sternwheeler', 'kayak', 'canoe', 'deck', 'DockingFacility', 'surfboard', '=ship', '=cruise', 'watercraft', 'sail', 'canvas', '=raft']
  493. USE_MOBILE_NET = true;
  494. } else if (word == BICYCLE) {
  495. word = ['bicycle-built-for-two', 'tandem bicycle', 'bicycle', 'tricycle', 'mountain bike', 'AcceleratorPedal', 'macaw', 'knot']
  496. USE_MOBILE_NET = true;
  497. } else if (word == MOTORCYCLE) {
  498. word = ['moped', 'motor scooter', 'scooter', 'motorcycle', 'windshield', 'dashboard']
  499. USE_MOBILE_NET = true;
  500. } else if (word == TRUCK) {
  501. word = ['truck', 'cargocontainer', 'bazooka']
  502. USE_MOBILE_NET = true;
  503. } else if (word == TRIMARAN || word == SPEEDBOAT || word == SEAPLANE) {
  504. word = ['spatula', 'can opener', 'tin opener', 'monitor', 'screen', 'stretcher', 'printer', 'nail', 'mousetrap', 'TRIMARAN', 'space shuttle', 'ski', 'rotisserie', 'geyser', 'plate rack']
  505. USE_MOBILE_NET = true;
  506. } else if (word.includesOneOf(LIVING_ROOM_TYPES)) {
  507. word = ['bed', 'couch', 'chair', 'potted plant', 'dining table', 'clock', 'tv', 'book']
  508. } else if (word == ZEBRA) {
  509. word = ['zebra']
  510. } else if (word == CAT) {
  511. word = ['cat']
  512. USE_MOBILE_NET = true;
  513. } else if (word == DOG) {
  514. word = ['dog']
  515. } else if (word == VALLEY || word == VERTICAL_RIVER){
  516. word = ['alp','volcano']
  517. USE_COLOUR_PATTERN = true;
  518. } else {
  519. NEW_WORD_IDENTIFIED = true;
  520. console.log("Word does not match. New type identified::" + word);
  521. }
  522.  
  523. return word
  524.  
  525. }
  526.  
  527. function isHidden(el) {
  528. return (el.offsetParent === null)
  529. }
  530.  
  531. if (window.location.href.includes("checkbox")) {
  532. var checkboxInterval = setInterval(function() {
  533. if (!qSelector(CHECK_BOX)) {
  534. //Wait until the checkbox element is visible
  535. } else if (qSelector(CHECK_BOX).getAttribute(ARIA_CHECKED) == "true") {
  536. clearInterval(checkboxInterval);
  537. } else if (!isHidden(qSelector(CHECK_BOX)) && qSelector(CHECK_BOX).getAttribute(ARIA_CHECKED) == "false") {
  538. qSelector(CHECK_BOX).click();
  539. } else {
  540. return;
  541. }
  542.  
  543. }, 5000);
  544. } else {
  545.  
  546. try {
  547. await initializeTesseractWorker();
  548. await initializeTensorFlowModel();
  549. await initializeTensorFlowMobilenetModel();
  550. selectImages();
  551.  
  552. } catch (err) {
  553. console.log(err);
  554. console.log("Tesseract could not be initialized");
  555. }
  556.  
  557. }
  558.  
  559. function selectImagesAfterDelay(seconds) {
  560. setTimeout(function() {
  561. selectImages();
  562. }, seconds * 1000);
  563. }
  564.  
  565. function triggerEvent(el, type) {
  566. var e = document.createEvent('HTMLEvents');
  567. e.initEvent(type, false, true);
  568. el.dispatchEvent(e);
  569. }
  570.  
  571. function triggerMouseEvent(el, type) {
  572. var e = document.createEvent('MouseEvent');
  573. e.initEvent(type, false, true);
  574. el.dispatchEvent(e);
  575. }
  576.  
  577. // Small hack to select the nodes
  578. function unsure(targetNodeText) {
  579. var targetNode = Array.from(qSelectorAll('div'))
  580. .find(el => el.textContent === targetNodeText);
  581. //Works for now
  582. //TODO: Select clothing
  583. //TODO: Draw boxes around images
  584. if (targetNode) {
  585. triggerMouseEvent(targetNode, 'mousedown');
  586. triggerMouseEvent(targetNode, 'mouseup');
  587. if (qSelector(SUBMIT_BUTTON)) {
  588. qSelector(SUBMIT_BUTTON).click();
  589. }
  590. }
  591. return selectImagesAfterDelay(1);
  592. }
  593.  
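// Extracts the image URL from a style value such as url("https://..."); returns 0 when no https URL is found.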
  594. function getUrlFromString(urlString) {
  595.  
  596. var imageUrl = urlString.substring(
  597. urlString.indexOf('"') + 1,
  598. urlString.lastIndexOf('"')
  599. );
  600.  
  601. if (!imageUrl || !imageUrl.includes("https")) {
  602. return 0;
  603. }
  604.  
  605. return imageUrl;
  606. }
  607.  
  608.  
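// Collects the nine tile image URLs from the task grid; returns early with an incomplete list if any URL is missing.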
  609. function getImageList() {
  610. var imageList = [];
  611. if (qSelectorAll(IMAGE).length > 0) {
  612. for (var i = 0; i < 9; i++) {
  613. var urlString = qSelectorAll(IMAGE)[i].style.background;
  614. var imageUrl = getUrlFromString(urlString);
  615. if (imageUrl == 0) {
  616. //console.log("Image url is empty");
  617. return imageList;
  618. }
  619. imageList[i] = imageUrl;
  620. }
  621. }
  622. return imageList;
  623. }
  624.  
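// Polls every 3 s: submits once all nine tiles have been processed, restarts selection after too
// many polls, and submits earlier on the trainer path for newly identified words.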
  625. function waitUntilImageSelection() {
  626. var imageIntervalCount = 0;
  627. var imageInterval = setInterval(function() {
  628. imageIntervalCount = imageIntervalCount + 1;
  629. if (selectedImageCount == 9) {
  630. clearInterval(imageInterval);
  631. if (qSelector(SUBMIT_BUTTON)) {
  632. qSelector(SUBMIT_BUTTON).click();
  633. }
  634. return selectImagesAfterDelay(5);
  635. } else if (imageIntervalCount > 8) {
  636. clearInterval(imageInterval);
  637. return selectImages();
  638. } else if(selectedImageCount > 2 && MATCH_IMAGES_USING_TRAINER && NEW_WORD_IDENTIFIED && imageIntervalCount > 4){
  639. clearInterval(imageInterval);
  640. if (qSelector(SUBMIT_BUTTON)) {
  641. qSelector(SUBMIT_BUTTON).click();
  642. }
  643. return selectImagesAfterDelay(5);
  644. } else if(MATCH_IMAGES_USING_TRAINER && NEW_WORD_IDENTIFIED && imageIntervalCount > 6){
  645. clearInterval(imageInterval);
  646. if (qSelector(SUBMIT_BUTTON)) {
  647. qSelector(SUBMIT_BUTTON).click();
  648. }
  649. return selectImagesAfterDelay(5);
  650. }else{
  651.  
  652. }
  653. }, 3000);
  654. }
  655.  
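// Waits (up to ~5 minutes) for the 3x3 grid to appear, routes text challenges to OCR,
// and answers yes/no style prompts via unsure().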
  656. function waitForImagesToAppear() {
  657. var checkImagesSelectedCount = 0;
  658. var waitForImagesInterval = setInterval(function() {
  659. checkImagesSelectedCount = checkImagesSelectedCount + 1;
  660. if (qSelectorAll(IMAGE) && qSelectorAll(IMAGE).length == 9) {
  661. clearInterval(waitForImagesInterval);
  662. return selectImages();
  663. } else if (checkImagesSelectedCount > 60) {
  664. clearInterval(waitForImagesInterval);
  665. } else if (qSelector(CHALLENGE_INPUT_FIELD) && qSelector(NO_SELECTION).getAttribute(ARIA_HIDDEN) != "true") {
  666. clearInterval(waitForImagesInterval);
  667. return imageUsingOCR();
  668. } else {
  669. //TODO: Identify Objects for the following (Ex: bed,chair,table etc)
  670. //Ref for clothing: https://www.youtube.com/watch?v=yWwzFnAnrLM, https://www.youtube.com/watch?v=FiNglI1wRNk,https://www.youtube.com/watch?v=oHAkK_9UCQ8
  671. var targetNodeList = ["Yes", "3 or more items of furniture", "Equipped space or room", "Photo is clean, no watermarks, logos or text overlays", "An interior photo of room", "Unsure", "Photo is sharp"];
  672. for (var j = 0; j < targetNodeList.length; j++) {
  673. var targetNode = Array.from(qSelectorAll('div'))
  674. .find(el => el.textContent === targetNodeList[j]);
  675. if (targetNode) {
  676. //console.log("Target Node Found");
  677. clearInterval(waitForImagesInterval);
  678. return unsure(targetNodeList[j]);
  679. }
  680. }
  681. }
  682. }, 5000);
  683. }
  684.  
  685. //TODO: Convert Image to base64 to avoid multiple calls
  686. function preProcessImage(base64Image, imageUrl) {
  687.  
  688. //Darken and Brighten
  689. Jimp.read(base64Image).then(function(data) {
  690. data.color([
  691.  
  692. {
  693. apply: 'darken',
  694. params: [20]
  695. }
  696.  
  697. ]).color([
  698.  
  699. {
  700. apply: 'brighten',
  701. params: [20]
  702. }
  703.  
  704. ])
  705. .greyscale()
  706. .getBase64(Jimp.AUTO, function(err, src) {
  707. var img = document.createElement("img");
  708. img.setAttribute("src", src);
  709.  
  710. worker.recognize(img, LANGUAGE_FOR_OCR).then(function(data) {
  711. //Remove Image After recognizing
  712. img.removeAttribute("src");
  713. //If no text was recognized, fall back to the next preprocessing method
  714. if (data && data.text && data.text.length > 0) {
  715. inputChallenge(postProcessImage(data), imageUrl);
  716. return selectImages();
  717. } else {
  718. preProcessImageMethod2(base64Image, imageUrl);
  719. }
  720. });
  721.  
  722. });
  723. });
  724.  
  725. }
  726.  
  727.  
  728. function preProcessImageMethod2(base64Image, imageUrl) {
  729.  
  730. //Multi Contrast darken and brighten
  731. Jimp.read(base64Image).then(function(data) {
  732. data.color([
  733.  
  734. {
  735. apply: 'darken',
  736. params: [20]
  737. }
  738.  
  739. ]).contrast(1).color([
  740.  
  741. {
  742. apply: 'brighten',
  743. params: [20]
  744. }
  745.  
  746. ]).contrast(1).greyscale().getBase64(Jimp.AUTO, function(err, src) {
  747. var img = document.createElement("img");
  748. img.setAttribute("src", src);
  749.  
  750. worker.recognize(img, LANGUAGE_FOR_OCR).then(function(data) {
  751. //Remove Image After recognizing
  752. img.removeAttribute("src");
  753. if (data && data.text && data.text.length > 0) {
  754. inputChallenge(postProcessImage(data), imageUrl);
  755. return selectImages();
  756. } else {
  757. preProcessImageMethod3(base64Image, imageUrl);
  758. }
  759. });
  760. });
  761. });
  762.  
  763. }
  764.  
  765. function preProcessImageMethod3(base64Image, imageUrl) {
  766. //Multi Contrast only brighten
  767. Jimp.read(base64Image).then(function(data) {
  768. data.contrast(1).color([{
  769. apply: 'brighten',
  770. params: [20]
  771. }
  772.  
  773. ])
  774. .contrast(1)
  775. .greyscale()
  776. .getBase64(Jimp.AUTO, function(err, src) {
  777. var img = document.createElement("img");
  778. img.setAttribute("src", src);
  779.  
  780. worker.recognize(img, LANGUAGE_FOR_OCR).then(function(data) {
  781. //Remove Image After recognizing
  782. img.removeAttribute("src");
  783. if (data && data.text && data.text.length > 0) {
  784. inputChallenge(postProcessImage(data), imageUrl);
  785. return selectImages();
  786. } else {
  787. preProcessImageMethod4(base64Image, imageUrl);
  788. }
  789. });
  790. });
  791. });
  792. }
  793.  
  794. function preProcessImageMethod4(base64Image, imageUrl) {
  795. //Resize the image
  796. Jimp.read(base64Image).then(function(data) {
  797. data.resize(256, Jimp.AUTO)
  798. .quality(60) // set JPEG quality
  799. .greyscale() // set greyscale
  800. .getBase64(Jimp.AUTO, function(err, src) {
  801. var img = document.createElement("img");
  802. img.setAttribute("src", src);
  803.  
  804. worker.recognize(img, LANGUAGE_FOR_OCR).then(function(data) {
  805. //Remove Image After recognizing
  806. img.removeAttribute("src");
  807. inputChallenge(postProcessImage(data), imageUrl);
  808. return selectImages();
  809. });
  810. });
  811. });
  812.  
  813. }
  814.  
  815. function postProcessImage(data) {
  816. var filterValues = ['\n', '{', '}', '[', ']'];
  817. for (var i = 0; i < filterValues.length; i++) {
  818. data.text = data.text.replaceAll(filterValues[i], "");
  819. }
  820. return data;
  821. }
  822.  
  823. // Using Tesseract to recognize images
  824. function imageUsingOCR() {
  825. try {
  826. //console.log("Image using OCR");
  827. var urlString = qSelector(IMAGE_FOR_OCR).style.background;
  828. var imageUrl = getUrlFromString(urlString);
  829. if (imageUrl == 0) {
  830. return selectImagesAfterDelay(1);
  831. }
  832.  
  833. Jimp.read(imageUrl).then(function(data) {
  834.  
  835. data.getBase64(Jimp.AUTO, function(err, src) {
  836.  
  837. var img = document.createElement("img");
  838. img.setAttribute("src", src);
  839. var base64Image = img.src;
  840.  
  841. preProcessImage(base64Image, imageUrl);
  842.  
  843. })});
  844.  
  845. } catch (err) {
  846. console.log(err.message);
  847. return selectImagesAfterDelay(1);
  848. }
  849. }
  850.  
  851.  
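// Renders the prompt text onto a canvas and returns it as an <img>, so the prompt can be re-read with Tesseract.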
  852. async function convertTextToImage(text) {
  853.  
  854. //Convert Text to image
  855. var canvas = document.createElement("canvas");
  856. var textLength = text.length;
  857. canvas.width = 60 * textLength;
  858. canvas.height = 80;
  859. var ctx = canvas.getContext('2d');
  860. ctx.font = "30px Arial";
  861. ctx.fillText(text, 10, 50);
  862. var img = document.createElement("img");
  863. img.src = canvas.toDataURL();
  864.  
  865. return img;
  866. }
  867.  
  868. async function convertImageToText(img) {
  869.  
  870. await initializeTesseractWorker();
  871.  
  872. //Convert Image to Text
  873. var text = "";
  874. await worker.recognize(img, LANGUAGE_FOR_OCR).then(function(data) {
  875. text = data.text;
  876. // console.log("Recognized Text::" + text);
  877. });
  878. return text.trim();
  879. }
  880.  
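// Compares the current example-image URLs with the previously stored set and returns true when they differ (i.e. a new challenge).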
  881. function areExampleImageUrlsChanged() {
  882.  
  883. var prevExampleUrls = exampleImageList;
  884. currentExampleUrls = [];
  885.  
  886. if (qSelectorAll(CHALLENGE_IMAGE).length > 0) {
  887. for (let i = 0; i < qSelectorAll(CHALLENGE_IMAGE).length; i++) {
  888. var urlString = qSelectorAll(CHALLENGE_IMAGE)[i].style.background;
  889. var imageUrl = getUrlFromString(urlString);
  890. if (imageUrl == 0) {
  891. console.log("Image url is empty, Retrying...");
  892. return true;
  893. }
  894. currentExampleUrls[i] = imageUrl;
  895. }
  896. }
  897.  
  898. if (prevExampleUrls.length != currentExampleUrls.length) {
  899. return true;
  900. }
  901.  
  902. for (let i = 0; i < currentExampleUrls.length; i++) {
  903.  
  904. if (prevExampleUrls[i] != currentExampleUrls[i]) {
  905. return true;
  906. }
  907. }
  908.  
  909. return false;
  910. }
  911.  
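// Runs coco-ssd over the example images and collects the distinct predicted classes into
// identifiedObjectsList; sets identifyObjectsFromImagesCompleted after the last image.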
  912. async function identifyObjectsFromImages(imageUrlList) {
  913. identifiedObjectsList = [];
  914.  
  915. for (let i = 0; i < imageUrlList.length; i++) {
  916. try {
  917. let img = new Image();
  918. img.crossOrigin = "Anonymous";
  919. img.src = imageUrlList[i];
  920. img.onload = () => {
  921. initializeTensorFlowModel().then(model => model.detect(img))
  922. .then(function(predictions) {
  923. let predictionslen = predictions.length;
  924. let hashSet = new Set();
  925. for (let j = 0; j < predictionslen; j++) {
  926. hashSet.add(predictions[j].class);
  927. }
  928.  
  929. hashSet.forEach((key) => {
  930. identifiedObjectsList.push(key);
  931. });
  932.  
  933. img.removeAttribute("src");
  934.  
  935. if (i == imageUrlList.length - 1) {
  936. identifyObjectsFromImagesCompleted = true;
  937. }
  938.  
  939. })
  940. }
  941. } catch (e) {
  942. console.log(e);
  943. }
  944.  
  945. }
  946.  
  947. }
  948.  
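// Same as identifyObjectsFromImages but uses MobileNet classification; comma-separated class
// names are split into individual labels before being collected.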
  949. async function identifyObjectsFromImagesUsingMobileNet(imageUrlList) {
  950. identifiedObjectsList = [];
  951.  
  952. for (let i = 0; i < imageUrlList.length; i++) {
  953. try {
  954. let img = new Image();
  955. img.crossOrigin = "Anonymous";
  956. img.src = imageUrlList[i];
  957. img.onload = () => {
  958. initializeTensorFlowMobilenetModel().then(model => model.classify(img))
  959. .then(function(predictions) {
  960.  
  961. let predictionslen = predictions.length;
  962. let hashSet = new Set();
  963. for (let j = 0; j < predictionslen; j++) {
  964. if(predictions[j].className.includes(",")){
  965. var multiPredictions = predictions[j].className.split(',');
  966. for(let k=0; k< multiPredictions.length;k++){
  967. hashSet.add(multiPredictions[k].trim());
  968. }
  969. }else{
  970. hashSet.add(predictions[j].className);
  971. }
  972. }
  973.  
  974. hashSet.forEach((key) => {
  975. identifiedObjectsList.push(key);
  976. });
  977.  
  978. img.removeAttribute("src");
  979.  
  980. if (i == imageUrlList.length - 1) {
  981. identifyObjectsFromImagesCompleted = true;
  982. }
  983.  
  984. })
  985. }
  986. } catch (e) {
  987. console.log(e);
  988. }
  989.  
  990. }
  991.  
  992. }
  993.  
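// Returns the most frequently identified object that belongs to a known category
// (transport, living room, animal or valley), or -1 if none matches.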
  994. async function getWordFromIdentifiedObjects(identifiedObjectsList) {
  995.  
  996. var hashMap = new Map();
  997. for (var i = 0; i < identifiedObjectsList.length; i++) {
  998. if (hashMap.has(identifiedObjectsList[i])) {
  999. hashMap.set(identifiedObjectsList[i], hashMap.get(identifiedObjectsList[i]) + 1)
  1000. } else {
  1001. hashMap.set(identifiedObjectsList[i], 1)
  1002. }
  1003. }
  1004. var maxCount = 0,
  1005. objectKey = -1;
  1006. hashMap.forEach((value, key) => {
  1007. if (maxCount < value && (key.equalsOneOf(TRANSPORT_TYPES) ||
  1008. key.equalsOneOf(LIVING_ROOM_TYPES) ||
  1009. key.equalsOneOf(ANIMAL_TYPES)|| key == VALLEY)) {
  1010. objectKey = key;
  1011. maxCount = value;
  1012. }
  1013.  
  1014. });
  1015.  
  1016. return objectKey;
  1017. }
  1018.  
  1019.  
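// Types the OCR result into the challenge input field, fires an input event and submits.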
  1020. function inputChallenge(data, imageUrl) {
  1021. try {
  1022. if ((qSelector(IMAGE_FOR_OCR).style.background).includes(imageUrl)) {
  1023. console.log(data.text);
  1024. var targetNode = qSelector(CHALLENGE_INPUT_FIELD);
  1025. targetNode.value = data.text.replaceAll("\n", "");
  1026. var challengeInput = qSelector(CHALLENGE_INPUT);
  1027. triggerEvent(challengeInput, 'input');
  1028. // Set a timeout if you want to see the text
  1029. qSelector(SUBMIT_BUTTON).click();
  1030. }
  1031.  
  1032. } catch (err) {
  1033. console.log(err.message);
  1034. }
  1035. }
  1036.  
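// Infers the target word from the example images when the prompt cannot be used:
// coco-ssd first, then MobileNet as a second pass.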
  1037. async function identifyWordFromExamples() {
  1038.  
  1039. var word = -1;
  1040. if (areExampleImageUrlsChanged()) {
  1041. exampleImageList = currentExampleUrls;
  1042. if (exampleImageList.length == 0) {
  1043. return -1;
  1044. }
  1045. identifyObjectsFromImages(exampleImageList);
  1046. while (!identifyObjectsFromImagesCompleted) {
  1047. await delay(2000)
  1048. }
  1049. identifyObjectsFromImagesCompleted = false;
  1050. word = await getWordFromIdentifiedObjects(identifiedObjectsList);
  1051.  
  1052. //Word has not been identified yet, use mobile net to recognize images
  1053. if (word == -1) {
  1054. //Initialize MobileNet Model
  1055. await initializeTensorFlowMobilenetModel();
  1056. identifyObjectsFromImagesUsingMobileNet(exampleImageList);
  1057. while (!identifyObjectsFromImagesCompleted) {
  1058. await delay(2000)
  1059. }
  1060. identifyObjectsFromImagesCompleted = false;
  1061.  
  1062. word = await getWordFromIdentifiedObjects(identifiedObjectsList);
  1063. }
  1064. return word;
  1065.  
  1066. } else {
  1067. return getWordFromIdentifiedObjects(identifiedObjectsList);
  1068. }
  1069.  
  1070. return word;
  1071. }
  1072.  
  1073. var prevObject = "";
  1074.  
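// Returns true when the prompt text differs from the previous round (or on the first round).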
  1075. function isObjectChanged() {
  1076. if (!prevObject && qSelector(PROMPT_TEXT)) {
  1077. prevObject = qSelector(PROMPT_TEXT).innerText;
  1078. return true;
  1079. }
  1080.  
  1081. if (prevObject && qSelector(PROMPT_TEXT) &&
  1082. prevObject == qSelector(PROMPT_TEXT).innerText) {
  1083. return false;
  1084. }
  1085.  
  1086. return true;
  1087.  
  1088. }
  1089.  
  1090.  
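// Extracts the target word from the English prompt text, falling back to OCR of the rendered
// prompt and, for unknown words or non-English prompts, to identification from the example images.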
  1091. async function identifyWord() {
  1092. var word = -1;
  1093. try {
  1094. if (window.location.href.includes('&hl=en') || (ENABLE_DEFAULT_LANGUAGE && DEFAULT_LANGUAGE == LANG_ENGLISH)) {
  1095. word = qSelector(PROMPT_TEXT) ? qSelector(PROMPT_TEXT).innerText : word;
  1096. if (word && (word.includes(SENTENCE_TEXT_A) || word.includes(SENTENCE_TEXT_AN))) {
  1097. word = word.replace(SENTENCE_TEXT_A, '');
  1098. word = word.replace(SENTENCE_TEXT_AN, '');
  1099. }
  1100.  
  1101. if (word.equalsOneOf(TRANSPORT_TYPES) || word == VERTICAL_RIVER) {
  1102. return word;
  1103. } else {
  1104. //Using OCR on Text for accurate result
  1105. console.log("New word or different cyrillic");
  1106. var img = await convertTextToImage(word);
  1107. word = await convertImageToText(img);
  1108. word = word.replace(SENTENCE_TEXT_A, '');
  1109. word = word.replace(SENTENCE_TEXT_AN, '');
  1110. if (word.equalsOneOf(TRANSPORT_TYPES) || word == VERTICAL_RIVER) {
  1111. return word;
  1112. } else {
  1113. if(MATCH_IMAGES_USING_TRAINER){
  1114. word = qSelector(PROMPT_TEXT) ? qSelector(PROMPT_TEXT).innerText : -1;
  1115. if(word){
  1116. img = await convertTextToImage(word);
  1117. word = await convertImageToText(img);
  1118. }
  1119. return word;
  1120. }else{
  1121. word = await identifyWordFromExamples();
  1122. }
  1123. }
  1124. }
  1125. } else {
  1126.  
  1127. //If the word is not English,
  1128. //identify the word from the example images
  1129. word = await identifyWordFromExamples();
  1130. }
  1131.  
  1132. } catch (e) {
  1133. console.log(e);
  1134. }
  1135.  
  1136. return word;
  1137. }
  1138.  
  1139. var prevWord = "";
  1140.  
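// Main loop: applies the default language, identifies the target word, gets its synonyms and
// routes the nine tiles to the appropriate matcher (trainer, colour pattern, MobileNet, coco-ssd or imageidentify).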
  1141. async function selectImages() {
  1142.  
  1143. if (ENABLE_DEFAULT_LANGUAGE) {
  1144. for (let i = 0; i < qSelectorAll(LANGUAGE_SELECTOR).length; i++) {
  1145. if (qSelectorAll(LANGUAGE_SELECTOR)[i].innerText == DEFAULT_LANGUAGE) {
  1146. document.querySelectorAll(LANGUAGE_SELECTOR)[i].click();
  1147. await delay(1000);
  1148. }
  1149. }
  1150. }
  1151.  
  1152. if (qSelectorAll(IMAGE) && qSelectorAll(IMAGE).length == 9 && qSelector(NO_SELECTION).getAttribute(ARIA_HIDDEN) != "true") {
  1153. selectedImageCount = 0;
  1154. try {
  1155.  
  1156. if (isObjectChanged()) {
  1157. prevWord = await identifyWord();
  1158. }
  1159.  
  1160. var word = prevWord;
  1161.  
  1162. if (word == -1 && skipCount >= MAX_SKIPS) {
  1163. console.log("Max Retries Attempted. Captcha cannot be solved");
  1164. return;
  1165. } else if (word == -1 && skipCount < MAX_SKIPS) {
  1166. skipCount++;
  1167. if (qSelector(SUBMIT_BUTTON)) {
  1168. qSelector(SUBMIT_BUTTON).click();
  1169. }
  1170. return selectImagesAfterDelay(5);
  1171. } else {
  1172. //Get Synonyms for the word
  1173. word = await getSynonyms(word);
  1174. //console.log("words are::" + word);
  1175. }
  1176.  
  1177.  
  1178. } catch (err) {
  1179. console.log(err.message);
  1180. return selectImagesAfterDelay(5);
  1181. }
  1182.  
  1183. var imageList = [];
  1184. try {
  1185. imageList = getImageList();
  1186. if (imageList.length != 9) {
  1187. //console.log("Waiting");
  1188. // Image containers are visible but there are no urls in the image
  1189. // Skip the image
  1190. if (qSelector(SUBMIT_BUTTON)) {
  1191. qSelector(SUBMIT_BUTTON).click();
  1192. }
  1193. return selectImagesAfterDelay(5);
  1194. }
  1195. } catch (err) {
  1196. console.log(err.message);
  1197. return selectImagesAfterDelay(5);
  1198. }
  1199.  
  1200. //Identifying word for seaplane and matching images
  1201. //TODO: Refactor Code to combine different models or use only one model based on accuracy
  1202. if(word && word != -1 && MATCH_IMAGES_USING_TRAINER && NEW_WORD_IDENTIFIED){
  1203. for (let i = 0; i < 9; i++) {
  1204. matchImagesUsingTrainer(imageList[i], word, i);
  1205. }
  1206. }else if(word && word != -1 && USE_COLOUR_PATTERN){
  1207. for (let i = 0; i < 9; i++) {
  1208. matchImageForVerticalRiver(imageList[i], word, i);
  1209. }
  1210. }else if (word && word != -1 && USE_MOBILE_NET) {
  1211. for (let i = 0; i < 9; i++) {
  1212. matchImagesUsingTensorFlowMobileNet(imageList[i], word, i);
  1213. }
  1214. } else if (word && word != -1) {
  1215. for (var i = 0; i < 9; i++) {
  1216. if (ENABLE_TENSORFLOW) {
  1217. matchImagesUsingTensorFlow(imageList[i], word, i);
  1218. } else {
  1219. matchImages(imageList[i], word, i);
  1220. }
  1221. }
  1222. }
  1223. waitUntilImageSelection();
  1224.  
  1225. } else {
  1226. waitForImagesToAppear();
  1227. }
  1228. }
  1229.  
  1230.  
  1231. })();