SimpleCov: include engine code in the coverage report - Rails testing

Asked: 2016-06-14 14:20:56

Tags: ruby-on-rails testing simplecov

I'm using SimpleCov as the coverage tool in my Rails application. The code in this application is split into engines, and I include the engines in the application based on some input.

The problem with SimpleCov is that it only generates a coverage report for the code inside the application itself and ignores the code in the engines. Does anyone have a solution?

1 answer:

Answer 0 (score: 0):

You have to load every class before running code coverage, by adding the loading code to your Rails environment file.
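A minimal sketch of one way to do this, assuming an RSpec setup with engines checked out under an engines/ directory; the file location, the track_files glob, and the group label are illustrative assumptions, not from the original answer:

```ruby
# spec/spec_helper.rb -- hypothetical layout; adjust paths to your project.
require 'simplecov'

# Start SimpleCov before any application code is loaded, otherwise
# already-loaded classes will not be instrumented.
SimpleCov.start 'rails' do
  # Track engine sources even if no test ever requires them, so
  # untested engine files still appear in the report (at 0%).
  track_files '{app,lib,engines}/**/*.rb'
  add_group 'Engines', 'engines'
end

# Boot the app, then eager-load it (and its mounted engines) so that
# every class is loaded and shows up in the coverage report.
require File.expand_path('../config/environment', __dir__)
Rails.application.eager_load!
```

The key ordering constraint is that SimpleCov.start must run before the Rails environment is required; eager loading afterwards ensures engine classes are counted rather than silently skipped because no test happened to require them.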