页面加载

时间:2017-08-05 01:35:59

标签: javascript jquery html

我有一个带有#kt div的HTML页面,它有几个单选按钮。 jQuery代码应检查选中哪个单选按钮并将页面滚动到它。无论如何,加载页面时代码将不起作用。有什么帮助吗?

$(document).ready(function(){
  // Scroll the horizontally-overflowing #kt container so that the checked
  // radio button ends up centered in the viewport.
  //
  // Bug fix: the radios are nested inside child <div>s of #kt, so the
  // direct-child selector '#kt > input' matched no elements and the handler
  // silently did nothing on page load. A descendant selector finds them.
  $('#kt input').each(function() {
    var container = $('#kt'),
      scrollTo = $(this);

    if (scrollTo.is(':checked')) {
      // Offset of the radio relative to the container's scrollable content,
      // shifted so the radio's center aligns with the window's center.
      var leftOffset = scrollTo.offset().left - ($(window).width() / 2 - scrollTo.width() / 2) + container.scrollLeft();

      container.animate({
        scrollLeft: leftOffset
      }, 1000);
    }
  });
});

<!-- NOTE(review): id "someID-6" appears on both the first and last radio
     below — element ids must be unique within a document; getElementById /
     #someID-6 selectors will only ever see the first one. -->
<div id="kt" style="display: inline-block; width: 2000px">
<div style="float: left; width: 500px; text-align: center"><input name="nappi" type="radio" id="someID-6" class="listaus-valintaruutu" /></div>
<div style="float: left; width: 500px; text-align: center"><input name="nappi" type="radio" id="someID-5" class="listaus-valintaruutu" /></div>
<div style="float: left; width: 500px; text-align: center"><input name="nappi" type="radio" id="someID-4" class="listaus-valintaruutu" /></div>
<div style="float: left; width: 500px; text-align: center"><input name="nappi" type="radio" id="someID-6" class="listaus-valintaruutu" /></div>
</div>

2 个答案:

答案 0 :(得分:0)

这很好用:

// Center the checked radio button inside the scrollable #kaaviotaulukko
// container by animating the container's horizontal scroll position.
$('#kaaviotaulukko input[type=radio]').each(function() {
  var wrapper = $('#kaaviotaulukko');
  var radio = $(this);

  // Only the checked radio triggers the scroll; skip the rest.
  if (!radio.is(':checked')) {
    return;
  }

  // Radio's position within the scrollable content, shifted so its center
  // lines up with the center of the window.
  var target = radio.offset().left - ($(window).width() / 2 - radio.width() / 2) + wrapper.scrollLeft();

  wrapper.animate({ scrollLeft: target }, 1000);
});

答案 1 :(得分:0)

必须动画到您计算出的位置的元素是 body……而不是容器:

$(document).ready(function() {
  $('#kt input').each(function() {
    var container = $('#kt'),
      scrollTo = $(this);

    if (scrollTo.is(':checked')) {
      var leftOffset = scrollTo.offset().left - ($(window).width() / 2 - scrollTo.width() / 2) + container.scrollLeft();
      console.log(leftOffset);
      //container.animate({
      $("body").animate({
        scrollLeft: leftOffset
      }, 1000);
    }
  });
});

我检查了一个测试...

<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>

<!-- NOTE(review): id "someID-6" is used on two radios below — ids must be
     unique within a document. The third radio carries the checked attribute,
     which is what the jQuery snippet's :checked filter picks up on load. -->
<div id="kt" style="display: inline-block; width: 2000px">
  <div style="float: left; width: 500px; text-align: center">
    <input name="nappi" type="radio" id="someID-6" class="listaus-valintaruutu" />
  </div>
  <div style="float: left; width: 500px; text-align: center">
    <input name="nappi" type="radio" id="someID-5" class="listaus-valintaruutu" />
  </div>
  <div style="float: left; width: 500px; text-align: center">
    <input name="nappi" type="radio" id="someID-4" class="listaus-valintaruutu" checked/>
  </div>
  <div style="float: left; width: 500px; text-align: center">
    <input name="nappi" type="radio" id="someID-6" class="listaus-valintaruutu" />
  </div>
</div>
import time
import threading
from mongo_queue import MongoQueue
from downloader import Downloader
import multiprocessing


SLEEP_TIME = 1

def threaded_crawler(seed_url, delay=5, cache=None, scrape_callback=None, user_agent='wswp', proxies=None, num_retries=1, max_threads=10, timeout=60):
    """Crawl a website in multiple threads.

    Seeds a shared MongoDB-backed URL queue with ``seed_url``, then keeps up
    to ``max_threads`` worker threads popping URLs, downloading them, and
    pushing any links returned by ``scrape_callback`` back onto the queue.
    Blocks until the queue is drained and all workers have exited.

    Args:
        seed_url: initial URL pushed onto the crawl queue.
        delay: politeness delay passed to Downloader.
        cache: optional page cache passed to Downloader.
        scrape_callback: optional callable(url, html) returning follow-up links.
        user_agent: User-Agent string for requests.
        proxies: optional proxies passed to Downloader.
        num_retries: download retry count passed to Downloader.
        max_threads: maximum number of concurrent worker threads.
        timeout: download timeout passed to Downloader.
    """
    # URL queue to be crawled; cleared so each run starts fresh.
    crawl_queue = MongoQueue()
    crawl_queue.clear()
    crawl_queue.push(seed_url)
    downloader = Downloader(delay=delay, user_agent=user_agent, proxies=proxies, num_retries=num_retries, cache=cache, timeout=timeout)

    def process_queue():
        # Worker loop: pop URLs until the queue signals empty via KeyError.
        while True:
            try:
                url = crawl_queue.pop()
            except KeyError:
                # crawl queue is empty
                break
            else:
                html = downloader(url)
                if scrape_callback:
                    try:
                        links = scrape_callback(url, html) or []
                    except Exception as e:
                        # Best-effort: a failing callback must not kill the worker.
                        print('Error in call back for %s, %s' % (url, e))
                    else:
                        for link in links:
                            crawl_queue.push(link)

    threads = []
    # NOTE(review): this condition relies on MongoQueue defining truthiness
    # (__bool__/__len__); a plain object is always truthy, which would make
    # this loop never terminate — confirm against MongoQueue.
    while threads or crawl_queue:
        # The crawl is still active: reap threads that have finished.
        # Bug fix: the original removed items from `threads` while iterating
        # over it, which skips the element after each removal; rebuilding the
        # list is safe and equivalent.
        threads = [thread for thread in threads if thread.is_alive()]
        while len(threads) < max_threads and crawl_queue.peek():
            # Can start some more threads.
            thread = threading.Thread(target=process_queue)
            # Daemon workers don't block interpreter exit.
            # (Assigning .daemon replaces setDaemon(), deprecated since 3.10.)
            thread.daemon = True
            thread.start()
            threads.append(thread)
        time.sleep(SLEEP_TIME)


def process_crawler(args, **kwargs):
    """Run threaded_crawler in one process per CPU core.

    Spawns cpu_count() processes, each executing threaded_crawler(args,
    **kwargs), and blocks until every process has finished.

    NOTE(review): under Windows/spawn every item in args/kwargs must be
    picklable; passing objects that hold thread locks (e.g. a live cache)
    raises "can't pickle _thread.lock objects" — confirm against callers.
    """
    num_cpus = multiprocessing.cpu_count()
    print('Starting Multiprocessing.... CPU Number is ', num_cpus)
    # One worker process per core, all running the same crawler entry point.
    processes = [
        multiprocessing.Process(target=threaded_crawler, args=[args], kwargs=kwargs)
        for _ in range(num_cpus)
    ]
    for worker in processes:
        worker.start()
    # Wait for every worker to drain the shared queue and exit.
    for worker in processes:
        worker.join()



Traceback (most recent call last):
Starting Multiprocessing.... CPU Number is  8
  File "C:/Users/Michael Qian/Desktop/Python/MyScraper/process_test.py", line 15, in <module>
    test(1)
  File "C:/Users/Michael Qian/Desktop/Python/MyScraper/process_test.py", line 10, in test
    process_crawler(scrape_callback.seed_url, scrape_callback=scrape_callback, cache=cache, max_threads=max_threads, timeout=10)
  File "C:\Users\Michael Qian\Desktop\Python\MyScraper\process_crawler.py", line 58, in process_crawler
    p.start()
  File "C:\Program Files\Python35\lib\multiprocessing\process.py", line 105, in start
    self._popen = self._Popen(self)
  File "C:\Program Files\Python35\lib\multiprocessing\context.py", line 212, in _Popen
    return _default_context.get_context().Process._Popen(process_obj)
  File "C:\Program Files\Python35\lib\multiprocessing\context.py", line 313, in _Popen
    return Popen(process_obj)
  File "C:\Program Files\Python35\lib\multiprocessing\popen_spawn_win32.py", line 66, in __init__
    reduction.dump(process_obj, to_child)
  File "C:\Program Files\Python35\lib\multiprocessing\reduction.py", line 59, in dump
    ForkingPickler(file, protocol).dump(obj)
TypeError: can't pickle _thread.lock objects
Traceback (most recent call last):
  File "<string>", line 1, in <module>
  File "C:\Program Files\Python35\lib\multiprocessing\spawn.py", line 106, in spawn_main
    exitcode = _main(fd)
  File "C:\Program Files\Python35\lib\multiprocessing\spawn.py", line 116, in _main
    self = pickle.load(from_parent)
EOFError: Ran out of input