Dem <edema4...@gmail.com> added the comment:

It seems that even without the as_completed call, the same problem occurs.


```
# -*- coding: utf-8 -*-
import dns.resolver
import concurrent.futures
from pprint import pprint
import json


bucket = json.load(open('30_million_strings.json','r'))


def _dns_query(target, **kwargs):
    global bucket
    resolv = dns.resolver.Resolver()
    resolv.timeout = kwargs['function']['dns_request_timeout']
    try:
        resolv.query(target + '.com', kwargs['function']['query_type'])
        with open('out.txt', 'a') as f:
            f.write(target + '\n')
    except Exception:
        pass


def run(**kwargs):
    global bucket
    temp_locals = locals()
    pprint({k: v for k, v in temp_locals.items()})

    with concurrent.futures.ThreadPoolExecutor(
            max_workers=kwargs['concurrency']['threads']) as executor:
        for element in bucket:
            executor.submit(kwargs['function']['name'], element, **kwargs)


run(function={'name': _dns_query, 'dns_request_timeout': 1, 'query_type': 'MX'},
    concurrency={'threads': 15})
```
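
For what it's worth, the growth here comes from the fact that every executor.submit() call immediately creates a Future and queues a work item, so all 30 million entries end up pending in memory at once. A minimal sketch of one way to bound that (the helper name and the max_pending value below are my own, not part of the reproducer) is to throttle submission with a semaphore:

```
# Sketch only: keep at most max_pending futures in flight at a time.
import threading
import concurrent.futures


def submit_bounded(executor, fn, iterable, max_pending=1000, **kwargs):
    semaphore = threading.BoundedSemaphore(max_pending)

    def _release(_future):
        semaphore.release()

    for item in iterable:
        semaphore.acquire()  # blocks while max_pending futures are outstanding
        future = executor.submit(fn, item, **kwargs)
        future.add_done_callback(_release)
```

With something like that, the loop in run() would call submit_bounded(executor, kwargs['function']['name'], bucket, **kwargs) instead of submitting everything up front.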

----------

_______________________________________
Python tracker <rep...@bugs.python.org>
<https://bugs.python.org/issue34168>
_______________________________________