Server: Apache/2.4.52 (Ubuntu)
System: Linux spn-python 5.15.0-89-generic #99-Ubuntu SMP Mon Oct 30 20:42:41 UTC 2023 x86_64
User: arjun (1000)
PHP: 8.1.2-1ubuntu2.20
Disabled: NONE
File: /home/arjun/projects/buyercall/node_modules/clean-css/lib/optimizer/level-2/remove-duplicates.js
var Token = require('../../tokenizer/token');

var serializeBody = require('../../writer/one-time').body;
var serializeRules = require('../../writer/one-time').rules;

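// Removes duplicated rules: when a later rule has the same selector and an
// identical body, the earlier copy is redundant, so its body is emptied.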
function removeDuplicates(tokens) {
  var matched = {};
  var moreThanOnce = [];
  var id, token;
  var body, bodies;

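  // First pass: index rule tokens by their serialized selector and record
  // which selectors appear more than once.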
  for (var i = 0, l = tokens.length; i < l; i++) {
    token = tokens[i];
    if (token[0] != Token.RULE) { continue; }

    id = serializeRules(token[1]);

    if (matched[id] && matched[id].length == 1) {
      moreThanOnce.push(id);
    } else {
      matched[id] = matched[id] || [];
    }

    matched[id].push(i);
  }

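  // Second pass: for each repeated selector, walk its occurrences from last
  // to first and empty any earlier rule whose body has already been seen.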
  for (i = 0, l = moreThanOnce.length; i < l; i++) {
    id = moreThanOnce[i];
    bodies = [];

    for (var j = matched[id].length - 1; j >= 0; j--) {
      token = tokens[matched[id][j]];
      body = serializeBody(token[2]);

      if (bodies.indexOf(body) > -1) {
        token[2] = [];
      } else {
        bodies.push(body);
      }
    }
  }
}

module.exports = removeDuplicates;
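
For orientation, the same keep-the-last-duplicate idea is sketched below on a deliberately simplified, hypothetical token shape (plain [type, selector, body] triples); it is only an illustration of the technique, not clean-css's real tokenizer output or API.

// Illustration only: tokens here are hypothetical [type, selector, body]
// triples, not clean-css's internal token format.
function removeDuplicateRules(tokens) {
  var lastIndexByRule = {};

  // Remember the last index at which each (selector, body) pair occurs.
  for (var i = 0; i < tokens.length; i++) {
    if (tokens[i][0] != 'rule') { continue; }
    lastIndexByRule[tokens[i][1] + '{' + tokens[i][2] + '}'] = i;
  }

  // Keep a rule only if it is the last occurrence of its (selector, body) pair.
  return tokens.filter(function (token, index) {
    return token[0] != 'rule' ||
      lastIndexByRule[token[1] + '{' + token[2] + '}'] == index;
  });
}

// The first 'a { color:red }' is dropped; the later identical one is kept.
console.log(removeDuplicateRules([
  ['rule', 'a', 'color:red'],
  ['rule', 'p', 'margin:0'],
  ['rule', 'a', 'color:red']
]));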