Unable to write to CSV file

Asked: 2017-04-10 12:07:50

Tags: javascript arrays node.js csv

I want to store only the crawled URLs in an array using JavaScript. Here is my code:

var AxeBuilder = require('axe-webdriverjs');
var WebDriver = require('selenium-webdriver');
var json2csv = require('json2csv');
var fs = require('fs');
var crawled='';

var driver = new WebDriver.Builder()
  .forBrowser('firefox')
  .build();

driver
  .get('https://www.ally.com')
  .then(function () {
    AxeBuilder(driver)
      .analyze(function (results) {
        console.log(results.violations);
      });
  });

const CrawlKit = require('crawlkit');
const AxeRunner = require('crawlkit-runner-axe');

const crawler = new CrawlKit('https://www.ally.com');
// You could add a finder here in order to audit a whole network of pages
crawler.addRunner('aXe', new AxeRunner());

crawler.crawl()
    .then((data) => {
        console.log(JSON.stringify(data.results, true, 2));
        console.log(data.results.length);

        var crawled = [];
        for (var i = 0; i < data.results.length; i++) {
            crawled.push({
                'url': data.results[i]
            });
        }
    }, (err) => console.error(err));

//storeCSV(crawled);
console.log(crawled);
var csv = json2csv({ data: crawled, fields: ['url'] });
storeCSV(csv);


function storeCSV(storeJSArray) {

  fs.writeFile('C:/Users/AppData/Roaming/npm/node_modules/crawl.csv', storeJSArray, function(err) {
    if (err) throw err;
    console.log('File saved!');
  });
}

Expected result (as a CSV file):

[ { description: 'Ensures the contrast between foreground and background colors meets WCAG 2 AA contrast ratio thresholds',
    help: 'Elements must have sufficient color contrast',
    helpUrl: 'https://dequeuniversity.com/rules/axe/2.1/color-contrast?application=webdriverjs',
    id: 'color-contrast',
    impact: 'critical',
    nodes: [ [Object], [Object], [Object], [Object], [Object], [Object] ],
    tags: [ 'wcag2aa', 'wcag143' ] },
  { description: 'Ensures every id attribute value is unique',
    help: 'id attribute value must be unique',
    helpUrl: 'https://dequeuniversity.com/rules/axe/2.1/duplicate-id?application=webdriverjs',
    id: 'duplicate-id',
    impact: 'critical',
    nodes: [ [Object], [Object], [Object] ],
    tags: [ 'wcag2a', 'wcag411' ] },
  { description: 'Ensures <iframe> and <frame> elements contain a non-empty title attribute',
    help: 'Frames must have title attribute',
    helpUrl: 'https://dequeuniversity.com/rules/axe/2.1/frame-title?application=webdriverjs',
    id: 'frame-title',
    impact: 'critical',
    nodes: [ [Object] ],
    tags: [ 'wcag2a', 'wcag241', 'section508', 'section508.22.i' ] },
  { description: 'Ensures every HTML document has a lang attribute',
    help: '<html> element must have a lang attribute',
    helpUrl: 'https://dequeuniversity.com/rules/axe/2.1/html-has-lang?application=webdriverjs',
    id: 'html-has-lang',
    impact: 'serious',
    nodes: [ [Object] ],
    tags: [ 'wcag2a', 'wcag311' ] },
  { description: 'Ensures tabindex attribute values are not greater than 0',
    help: 'Elements should not have tabindex greater than zero',
    helpUrl: 'https://dequeuniversity.com/rules/axe/2.1/tabindex?application=webdriverjs',
    id: 'tabindex',
    impact: 'serious',
    nodes: [ [Object], [Object] ],
    tags: [ 'best-practice' ] } ]

Actual behavior:

'Url"

A blank CSV file containing only the field name. It is not capturing the crawl results. What am I missing? Any suggestions would be helpful.

1 Answer:

Answer 0: (score: 1)

There is no 'url' field in your data. Did you mean 'helpUrl'?
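
A minimal sketch of what that could look like, assuming each entry in data.results carries the axe fields shown in the expected output above (helpUrl, id, help); the question does not confirm the exact shape of crawlkit's results, so the property access here is an assumption. Note that the CSV also has to be generated inside the .then callback, after the crawl has finished; calling json2csv outside that block runs before any results exist and yields a header-only file:

crawler.crawl()
    .then((data) => {
        // Build rows from fields that actually appear in the results.
        // helpUrl/id/help are taken from the output pasted in the question;
        // if crawlkit nests the axe results differently, adjust the access path.
        var crawled = data.results.map(function (result) {
            return { helpUrl: result.helpUrl, id: result.id, help: result.help };
        });

        // Generate and store the CSV here, once the data is available.
        var csv = json2csv({ data: crawled, fields: ['helpUrl', 'id', 'help'] });
        storeCSV(csv);
    }, (err) => console.error(err));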